from google.colab import drive
drive.mount('/content/drive')
Drive already mounted at /content/drive; to attempt to forcibly remount, call drive.mount("/content/drive", force_remount=True).
import tensorflow as tf
from tensorflow import keras
# Set GPU memory growth
gpus = tf.config.experimental.list_physical_devices('GPU')
if gpus:
    try:
        for gpu in gpus:
            tf.config.experimental.set_memory_growth(gpu, True)
    except RuntimeError as e:
        print(e)
import tensorflow as tf
# Verify that TensorFlow can see a GPU
device_name = tf.test.gpu_device_name()
if device_name != '/device:GPU:0':
    raise SystemError('GPU device not found')
print('Found GPU at: {}'.format(device_name))
# Set the GPU as the default device
#with tf.device('/device:GPU:0'):
# Your TensorFlow code here
# Operations within this block will run on the GPU
Found GPU at: /device:GPU:0
# Read the saved .npy data files
import numpy as np
X_file_path = '/content/drive/MyDrive/COVID/X.npy'
y_file_path = '/content/drive/MyDrive/COVID/y.npy'
X = np.load(X_file_path)
y = np.load(y_file_path)
# Reshape and normalize pixel values to [0, 1]
height, width, channels = 128, 128, 3
X = X.reshape(-1, height, width, channels)
X_normalized = X.astype('float32') / 255.0
del X
X_normalized
array([...], dtype=float32)  # full element printout truncated; normalized values lie in [0.0, 1.0]
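# Sanity check (not part of the original run): confirm that images and labels line up
# and inspect the COVID/NORMAL class balance before splitting.
print(X_normalized.shape, y.shape)
unique_labels, counts = np.unique(y, return_counts=True)
print(dict(zip(unique_labels.tolist(), counts.tolist())))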
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X_normalized, y, test_size=0.3, random_state=42)
X_test, X_val, y_test, y_val = train_test_split(X_test, y_test, test_size=0.5, random_state=42)
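# A possible variant (sketch, not what was run above): passing stratify keeps the
# class ratio consistent across the train/validation/test splits, which matters for
# an imbalanced dataset like this one.
# X_train, X_test, y_train, y_test = train_test_split(
#     X_normalized, y, test_size=0.3, random_state=42, stratify=y)
# X_test, X_val, y_test, y_val = train_test_split(
#     X_test, y_test, test_size=0.5, random_state=42, stratify=y_test)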
del X_normalized
del y
print(X_train.shape)
print(X_val.shape)
print(X_test.shape)
(9665, 128, 128, 3)
(2072, 128, 128, 3)
(2071, 128, 128, 3)
from tensorflow.keras.applications import VGG19
from tensorflow.keras.layers import Flatten, Dense, Activation
from tensorflow.keras.models import Sequential
# Load the VGG19 model with pre-trained weights
base_model = VGG19(weights='imagenet', include_top=False, input_shape=(128, 128, 3))
# Freeze the layers of the VGG19 model
for layer in base_model.layers:
    layer.trainable = False
# Create custom model on top of VGG19
model = Sequential()
model.add(base_model)
model.add(Flatten())
model.add(Dense(512))
model.add(Activation("relu"))
model.add(Dense(256))
model.add(Activation("relu"))
model.add(Dense(1))
model.add(Activation("sigmoid"))
# Summary of the model
model.summary()
Model: "sequential"
_________________________________________________________________
 Layer (type)                Output Shape              Param #
=================================================================
 vgg19 (Functional)          (None, 4, 4, 512)         20024384
 flatten (Flatten)           (None, 8192)              0
 dense (Dense)               (None, 512)               4194816
 activation (Activation)     (None, 512)               0
 dense_1 (Dense)             (None, 256)               131328
 activation_1 (Activation)   (None, 256)               0
 dense_2 (Dense)             (None, 1)                 257
 activation_2 (Activation)   (None, 1)                 0
=================================================================
Total params: 24350785 (92.89 MB)
Trainable params: 4326401 (16.50 MB)
Non-trainable params: 20024384 (76.39 MB)
_________________________________________________________________
# Compile the model
model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
# Set up early stopping
from tensorflow.keras.callbacks import EarlyStopping
early_stopping = EarlyStopping(
    monitor='val_loss',            # monitor validation loss
    patience=10,                   # epochs without improvement before stopping
    restore_best_weights=True      # restore the best weights seen during training
)
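# Optional addition (not used in the run below): a ModelCheckpoint callback would save
# the best model to Drive during training; the file path here is illustrative.
from tensorflow.keras.callbacks import ModelCheckpoint
checkpoint = ModelCheckpoint(
    '/content/drive/MyDrive/COVID/best_model.keras',  # hypothetical path
    monitor='val_loss',
    save_best_only=True
)
# To use it, pass callbacks=[early_stopping, checkpoint] to model.fit below.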
# Train with early stopping
hist = model.fit(
    X_train, y_train,
    epochs=50,
    batch_size=32,
    validation_data=(X_val, y_val),
    callbacks=[early_stopping]
)
Epoch  1/50 - 303/303 - 37s 80ms/step - loss: 0.2532 - accuracy: 0.8982 - val_loss: 0.1446 - val_accuracy: 0.9459
Epoch  2/50 - 303/303 - 20s 65ms/step - loss: 0.1435 - accuracy: 0.9450 - val_loss: 0.8938 - val_accuracy: 0.7761
Epoch  3/50 - 303/303 - 20s 65ms/step - loss: 0.1760 - accuracy: 0.9377 - val_loss: 0.2368 - val_accuracy: 0.9160
Epoch  4/50 - 303/303 - 22s 71ms/step - loss: 0.0952 - accuracy: 0.9649 - val_loss: 0.1720 - val_accuracy: 0.9363
Epoch  5/50 - 303/303 - 22s 72ms/step - loss: 0.0831 - accuracy: 0.9705 - val_loss: 0.1590 - val_accuracy: 0.9421
Epoch  6/50 - 303/303 - 20s 66ms/step - loss: 0.0831 - accuracy: 0.9691 - val_loss: 0.1906 - val_accuracy: 0.9334
Epoch  7/50 - 303/303 - 20s 66ms/step - loss: 0.0772 - accuracy: 0.9707 - val_loss: 0.1138 - val_accuracy: 0.9638
Epoch  8/50 - 303/303 - 22s 72ms/step - loss: 0.0559 - accuracy: 0.9793 - val_loss: 0.1810 - val_accuracy: 0.9479
Epoch  9/50 - 303/303 - 20s 66ms/step - loss: 0.0458 - accuracy: 0.9834 - val_loss: 0.1130 - val_accuracy: 0.9657
Epoch 10/50 - 303/303 - 22s 73ms/step - loss: 0.0416 - accuracy: 0.9835 - val_loss: 0.1101 - val_accuracy: 0.9672
Epoch 11/50 - 303/303 - 22s 72ms/step - loss: 0.0383 - accuracy: 0.9858 - val_loss: 0.1339 - val_accuracy: 0.9653
Epoch 12/50 - 303/303 - 20s 66ms/step - loss: 0.0312 - accuracy: 0.9898 - val_loss: 0.1095 - val_accuracy: 0.9677
Epoch 13/50 - 303/303 - 22s 72ms/step - loss: 0.0353 - accuracy: 0.9880 - val_loss: 0.1311 - val_accuracy: 0.9643
Epoch 14/50 - 303/303 - 22s 72ms/step - loss: 0.0225 - accuracy: 0.9920 - val_loss: 0.1367 - val_accuracy: 0.9638
Epoch 15/50 - 303/303 - 20s 66ms/step - loss: 0.0356 - accuracy: 0.9867 - val_loss: 0.1247 - val_accuracy: 0.9657
Epoch 16/50 - 303/303 - 22s 72ms/step - loss: 0.0362 - accuracy: 0.9861 - val_loss: 0.1228 - val_accuracy: 0.9614
Epoch 17/50 - 303/303 - 22s 72ms/step - loss: 0.0176 - accuracy: 0.9940 - val_loss: 0.1581 - val_accuracy: 0.9609
Epoch 18/50 - 303/303 - 22s 72ms/step - loss: 0.0238 - accuracy: 0.9910 - val_loss: 0.1626 - val_accuracy: 0.9624
Epoch 19/50 - 303/303 - 20s 66ms/step - loss: 0.0208 - accuracy: 0.9922 - val_loss: 0.1415 - val_accuracy: 0.9677
Epoch 20/50 - 303/303 - 20s 66ms/step - loss: 0.0141 - accuracy: 0.9951 - val_loss: 0.1415 - val_accuracy: 0.9686
Epoch 21/50 - 303/303 - 20s 66ms/step - loss: 0.0101 - accuracy: 0.9962 - val_loss: 0.2939 - val_accuracy: 0.9257
Epoch 22/50 - 303/303 - 22s 72ms/step - loss: 0.0278 - accuracy: 0.9897 - val_loss: 0.1470 - val_accuracy: 0.9662
from matplotlib import pyplot as plt
plt.plot(hist.history['accuracy'])
plt.plot(hist.history['val_accuracy'])
plt.title('model accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train', 'val'], loc='upper left')
plt.show()
#summarize history for loss
plt.plot(hist.history['loss'])
plt.plot(hist.history['val_loss'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train','val'],loc ='upper left')
plt.show()
model.save('/content/drive/MyDrive/COVID/CovidDetector.h5')
model.save('/content/drive/MyDrive/COVID/CovidDetector.keras')
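# Quick check (not in the original notebook): reload the saved model to confirm the
# file written to Drive can be restored.
from tensorflow.keras.models import load_model
reloaded_model = load_model('/content/drive/MyDrive/COVID/CovidDetector.keras')
reloaded_model.summary()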
y_hat = model.predict(X_test)
def predict(y_hat):
    y_hat[y_hat >= 0.5] = 1
    y_hat[y_hat < 0.5] = 0
    return y_hat
y_pred = predict(y_hat)
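# Equivalent, non-mutating alternative (sketch): threshold the probabilities with a
# boolean comparison instead of editing y_hat in place; y_pred_alt is a hypothetical name.
y_pred_alt = (y_hat >= 0.5).astype('float32')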
from sklearn.metrics import accuracy_score
accuracy = accuracy_score(y_test, y_pred)
print(accuracy)
result = []
real_result = []
for i in y_pred:
    if i == 0:
        result.append('NORMAL')
    if i == 1:
        result.append('COVID')
# Build the true-label strings from y_test, not from the predictions
for i in y_test:
    if i == 0:
        real_result.append('NORMAL')
    if i == 1:
        real_result.append('COVID')
65/65 [==============================] - 3s 54ms/step
0.9724770642201835
from sklearn.metrics import confusion_matrix, classification_report, accuracy_score
labels = ['COVID', 'NORMAL']
report = classification_report(y_test, y_pred, target_names=labels)
print(report)
accuracy = accuracy_score(y_test, y_pred)
print(f"Accuracy: {accuracy}")
              precision    recall  f1-score   support

       COVID       0.98      0.99      0.98      1551
      NORMAL       0.96      0.93      0.94       520

    accuracy                           0.97      2071
   macro avg       0.97      0.96      0.96      2071
weighted avg       0.97      0.97      0.97      2071
Accuracy: 0.9724770642201835
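# confusion_matrix is imported above but never called; a minimal sketch of how it could
# be used with the same predictions (rows = true class, columns = predicted class).
cm = confusion_matrix(y_test, y_pred)
print(cm)
plt.imshow(cm, cmap='Blues')
plt.xticks([0, 1], labels)
plt.yticks([0, 1], labels)
plt.xlabel('Predicted')
plt.ylabel('True')
plt.colorbar()
plt.show()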
def show_image_with_prediction(image_array, prediction, label):
    plt.imshow(image_array)
    plt.axis('off')
    plt.title(f'Prediction: {prediction}, Label: {label}')
    plt.show()
image_arrays = X_test
predictions = result
label = real_result
random_indices = np.random.choice(len(X_test), 50, replace=False)
for index in random_indices:
    image_array = X_test[index]
    prediction = result[index]
    true_label = label[index]
    show_image_with_prediction(image_array, prediction, true_label)
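# A more compact alternative (sketch): show ten of the sampled test images in a single
# figure instead of 50 separate plots.
fig, axes = plt.subplots(2, 5, figsize=(15, 6))
for ax, idx in zip(axes.ravel(), random_indices[:10]):
    ax.imshow(X_test[idx])
    ax.axis('off')
    ax.set_title(f'Pred: {result[idx]} / True: {real_result[idx]}', fontsize=9)
plt.tight_layout()
plt.show()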